
@InProceedings{EsmiSussSand:2014:InTuEq,
               author = "Esmi, E. and Sussner, P. and Sandri, Sandra Aparecida",
          affiliation = "{Universidade Estadual de Campinas (UNICAMP)} and {Universidade 
                         Estadual de Campinas (UNICAMP)} and {Instituto Nacional de 
                         Pesquisas Espaciais (INPE)}",
                title = "An introduction to tunable equivalence fuzzy associative 
                         memories",
            booktitle = "Proceedings...",
                 year = "2014",
                pages = "1604--1611",
         organization = "2014. IEEE International Conference on Fuzzy Systems, 
                         (FUZZ-IEEE)",
            publisher = "Institute of Electrical and Electronics Engineers Inc.",
             keywords = "Associative processing, Associative storage, Equivalence classes, 
                         Associative memory models, Computational effort, Fuzzy associative 
                         memory, Fuzzy equivalence, Hidden layers, Hidden nodes, Training 
                         algorithms, Training data, Fuzzy neural networks.",
             abstract = "In this paper, we present a new class of fuzzy associative 
                         memories (FAMs) called tunable equivalence fuzzy associative 
                         memories, for short tunable E-FAMs or TE-FAMs, that belong to the 
                         class \Θ-fuzzy associative memories (\Θ-FAMs). Recall 
                         that 0-FAMs represent fuzzy neural networks having a competitive 
                         hidden layer and weights that can be adjusted via a training 
                         algorithm. Like any associative memory model, \Θ-FAMs depend 
                         on the specification of a fundamental memory set. In contrast to 
                         other \Θ-FAM models, TE-FAMs make use of parametrized fuzzy 
                         equivalence measures that are associated with the hidden nodes and 
                         allow for the extraction of a fundamental memory set from the 
                         training data. The use of a smaller fundamental memory set than in 
                         previous articles on \Θ-FAMs reduces the computational 
                         effort involved in deriving the weights without decreasing the 
                         quality of the results.",
  conference-location = "Beijing",
      conference-year = "July 6-11, 2014.",
                  doi = "10.1109/FUZZ-IEEE.2014.6891851",
                  url = "http://dx.doi.org/10.1109/FUZZ-IEEE.2014.6891851",
                 isbn = "9781479920723",
                 issn = "1098-7584",
                label = "scopus 2015-01 EsmiSussSand:2014:InTuEq",
             language = "en",
        urlaccessdate = "27 abr. 2024"
}
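
A minimal sketch of the competitive recall scheme the abstract describes, assuming
NumPy and a simple similarity-based equivalence measure. The function names, the
specific measure, and the fixed weights here are illustrative assumptions, not the
paper's actual TE-FAM definition, in which the equivalence measures are parametrized
and the weights are adjusted by a training algorithm.

    import numpy as np

    def equivalence(x, a):
        # Hypothetical fuzzy equivalence measure on [0,1]^n:
        # the mean of 1 - |x_i - a_i|, a stand-in for the paper's
        # parametrized equivalence measures.
        return float(np.mean(1.0 - np.abs(x - a)))

    def te_fam_recall(x, memories_in, memories_out, weights):
        # Competitive hidden layer: score each stored input pattern
        # by its weighted equivalence with x, then return the output
        # pattern associated with the winning hidden node.
        scores = [w * equivalence(x, a)
                  for a, w in zip(memories_in, weights)]
        winner = int(np.argmax(scores))
        return memories_out[winner]

    # Toy fundamental memory set: two input/output fuzzy sets.
    A = [np.array([1.0, 0.8, 0.1]), np.array([0.2, 0.1, 0.9])]
    B = [np.array([1.0, 0.0]), np.array([0.0, 1.0])]
    w = [1.0, 1.0]  # in a real TE-FAM these would come from training

    x = np.array([0.9, 0.7, 0.2])    # noisy version of A[0]
    print(te_fam_recall(x, A, B, w)) # -> [1. 0.]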

